This is an R Markdown Notebook. When you execute code within the notebook, the results appear beneath the code.

Try executing this chunk by clicking the Run button within the chunk or by placing your cursor inside it and pressing Cmd+Shift+Enter.

Add a new chunk by clicking the Insert Chunk button on the toolbar or by pressing Cmd+Option+I.

When you save the notebook, an HTML file containing the code and output will be saved alongside it (click the Preview button or press Cmd+Shift+K to preview the HTML file).

The preview shows you a rendered HTML copy of the contents of the editor. Consequently, unlike Knit, Preview does not run any R code chunks. Instead, the output of the chunk when it was last run in the editor is displayed.

# Classical multiplicative decomposition of the built-in AirPassengers
# series into trend, seasonal and remainder components (multiplicative:
# the seasonal swings grow with the level of the series).
plot(decompose(AirPassengers, type = "multiplicative"))

# AirPassengers ships with R as a "ts" (time-series) object.
class(AirPassengers)
[1] "ts"
plot(AirPassengers)

# Quarterly beer production, read from a local data file.
# NOTE(review): absolute path is machine-specific; this line only runs on
# the original author's machine.
beerprod <- scan("/Users/ansonleung/Desktop/Applied Financial Engineering/Financial Times Series Analysis/Lecture 1/R Code/beerprod.dat")
Read 72 items
# Declare quarterly frequency (72 obs = 18 years) so seasonal tools work.
beerprod = ts(beerprod, freq = 4)
# Seasonal-trend decomposition by loess with a fixed seasonal pattern.
plot(stl(beerprod, "periodic"))


# Centered annual moving average (weights 1/8,1/4,1/4,1/4,1/8); sides=2
# centers the window on each point, so the filter spans a full year.
trendpattern = filter(beerprod, filter = c(1/8, 1/4, 1/4, 1/4, 1/8), sides=2)
plot(beerprod, type= "b", main = "moving average annual trend")
lines(trendpattern)

# Detrended series = seasonal + irregular component.
seasonals = beerprod - trendpattern
plot(seasonals, type = "b", main = "Seasonal pattern for beer production")

# One-sided (trailing) 4-quarter moving average for comparison.
trendpattern2 = filter(beerprod, filter = c(1/4, 1/4, 1/4, 1/4), sides=1)
plot(beerprod, type= "b", main = "moving average annual trend")
lines(trendpattern2)


# Show more significant digits in the test printouts below.
options(digits = 9)
# BMW share return series from the evir package.
data(bmw, package = "evir")
# Ljung-Box test for autocorrelation at lags 1..5; the tiny p-value
# printed below rejects the white-noise null.
Box.test(bmw, lag = 5, type = "Ljung-Box")

    Box-Ljung test

data:  bmw
X-squared = 44.98701, df = 5, p-value = 1.45972e-08
# Fit an AR(1) with intercept to the BMW returns (estimates printed below).
arima(x=bmw, order=c(1,0,0))

Call:
arima(x = bmw, order = c(1, 0, 0))

Coefficients:
           ar1  intercept
      0.081116   0.000340
s.e.  0.012722   0.000205

sigma^2 estimated as 0.00021626:  log likelihood = 17212.34,  aic = -34418.68
# Refit the AR(1), keeping the object so the residuals can be tested.
fitAR1 = arima(bmw, order = c(1,0,0))
# Ljung-Box on the AR(1) residuals; fitdf = 1 subtracts the one
# estimated AR coefficient from the degrees of freedom (df = 10 - 1 = 9).
Box.test(residuals(fitAR1), lag = 10, type = "Ljung-Box", fitdf = 1)

    Box-Ljung test

data:  residuals(fitAR1)
X-squared = 17.51319, df = 9, p-value = 0.0412603
# Same residual test at lag 15 (df = 14).
Box.test(residuals(fitAR1), lag = 15, type = "Ljung-Box", fitdf = 1)

    Box-Ljung test

data:  residuals(fitAR1)
X-squared = 24.09947, df = 14, p-value = 0.0445707
# Same residual test at lag 20 (df = 19); this one rejects at 1%,
# so the AR(1) leaves autocorrelation at longer lags.
Box.test(residuals(fitAR1), lag = 20, type = "Ljung-Box", fitdf = 1)

    Box-Ljung test

data:  residuals(fitAR1)
X-squared = 39.29885, df = 19, p-value = 0.00404028
# Mishkin macro dataset from the Ecdat package; column 1 is used below.
data(Mishkin, package = "Ecdat")

# NOTE(review): as.ts() silently ignores the start/frequency arguments
# (they are swallowed by ...).  If Mishkin[,1] does not already carry its
# time attributes, use ts(Mishkin[,1], start = c(1950, 1), frequency = 12)
# instead — verify against the dataset's documentation.
x = as.ts(Mishkin[,1], start=1950, frequency=12)
acf(x)

pacf(x)


# AR-only order search (up to p = 20, no MA part) on the differenced
# series, selected by AIC.  auto.arima() comes from the forecast package,
# which is not loaded in this transcript — NOTE(review): add
# library(forecast) before this chunk.
auto.arima(diff(x), max.p=20, max.q=0,ic="aic")
Series: diff(x) 
ARIMA(10,0,0)(2,0,0)[12] with zero mean 

Coefficients:
            ar1        ar2        ar3        ar4        ar5        ar6        ar7        ar8
      -0.639976  -0.522701  -0.557229  -0.468459  -0.404792  -0.326150  -0.221002  -0.175748
s.e.   0.045994   0.055234   0.060157   0.065184   0.067415   0.067535   0.066081   0.060262
            ar9       ar10      sar1       sar2
      -0.087721  -0.069657  0.089960  -0.032370
s.e.   0.055756   0.047502  0.050207   0.051833

sigma^2 estimated as 8.61379:  log likelihood=-1217.38
AIC=2460.75   AICc=2461.52   BIC=2515.28
# Same AR-only search selected by BIC, which penalizes parameters harder
# and picks the smaller AR(6) model shown below.
auto.arima(diff(x), max.p=10, max.q=0,ic="bic")
Series: diff(x) 
ARIMA(6,0,0)(2,0,0)[12] with zero mean 

Coefficients:
            ar1        ar2        ar3        ar4        ar5        ar6      sar1       sar2
      -0.608313  -0.459024  -0.470350  -0.352204  -0.260538  -0.160104  0.101906  -0.010042
s.e.   0.045463   0.052406   0.054742   0.055632   0.053044   0.046181  0.048743   0.050094

sigma^2 estimated as 8.7618:  log likelihood=-1223.49
AIC=2464.98   AICc=2465.36   BIC=2502.73
data(Mishkin, package = "Ecdat")

# Work with the plain numeric vector this time (drops ts attributes).
y = as.vector(Mishkin[,1])
# MA(3) fit to the first-differenced series; estimates printed below.
fitMA3 = arima(diff(y), order = c(0,0,3))
fitMA3 

Call:
arima(x = diff(y), order = c(0, 0, 3))

Coefficients:
            ma1        ma2        ma3  intercept
      -0.632950  -0.102734  -0.108172  -0.000156
s.e.   0.046017   0.051399   0.046985   0.020892

sigma^2 estimated as 8.5046:  log likelihood = -1220.26,  aic = 2450.52
# Ljung-Box battery on the MA(3) residuals at increasing lags;
# fitdf = 3 removes the three estimated MA coefficients from the df.
Box.test(fitMA3$residuals, lag = 5, type = "Ljung", fitdf = 3)

    Box-Ljung test

data:  fitMA3$residuals
X-squared = 0.862017, df = 2, p-value = 0.649853
Box.test(fitMA3$residuals, lag = 10, type = "Ljung", fitdf = 3)

    Box-Ljung test

data:  fitMA3$residuals
X-squared = 4.205325, df = 7, p-value = 0.755848
Box.test(fitMA3$residuals, lag = 15, type = "Ljung", fitdf = 3)

    Box-Ljung test

data:  fitMA3$residuals
X-squared = 13.71089, df = 12, p-value = 0.31955
Box.test(fitMA3$residuals, lag = 20, type = "Ljung", fitdf = 3)

    Box-Ljung test

data:  fitMA3$residuals
X-squared = 26.45871, df = 17, p-value = 0.0664989
# No lag rejects at 5%: the MA(3) residuals look like white noise.
# More parsimonious MA(2) for comparison (AIC 2453.7 vs 2450.52 for the
# MA(3) above, so AIC slightly prefers the MA(3)).
fitMA2 = arima(diff(y), order = c(0,0,2))
fitMA2 

Call:
arima(x = diff(y), order = c(0, 0, 2))

Coefficients:
            ma1        ma2  intercept
      -0.662088  -0.166016   0.000145
s.e.   0.041879   0.040721   0.023044

sigma^2 estimated as 8.5951:  log likelihood = -1222.85,  aic = 2453.7
# Same Ljung-Box battery on the MA(2) residuals (fitdf = 2).
Box.test(fitMA2$residuals, lag = 5, type = "Ljung", fitdf = 2)

    Box-Ljung test

data:  fitMA2$residuals
X-squared = 6.608554, df = 3, p-value = 0.0854783
Box.test(fitMA2$residuals, lag = 10, type = "Ljung", fitdf = 2)

    Box-Ljung test

data:  fitMA2$residuals
X-squared = 8.838851, df = 8, p-value = 0.356072
Box.test(fitMA2$residuals, lag = 15, type = "Ljung", fitdf = 2)

    Box-Ljung test

data:  fitMA2$residuals
X-squared = 15.8254, df = 13, p-value = 0.258687
Box.test(fitMA2$residuals, lag = 20, type = "Ljung", fitdf = 2)

    Box-Ljung test

data:  fitMA2$residuals
X-squared = 29.48198, df = 18, p-value = 0.0427975
# The lag-20 test rejects at 5% (p = 0.043): mild leftover
# autocorrelation that the extra MA term in fitMA3 absorbs.
data(Capm, package = "Ecdat")
# NOTE(review): start=1960-01 is arithmetic (evaluates to 1959), not
# "January 1960", and as.ts() ignores start/frequency anyway.  If the
# time index matters, use ts(Capm[,1], start = c(1960, 1), frequency = 12).
y = as.ts(Capm[,1], start=1960-01, frequency=12)

# ARMA(1,1) fit; estimates printed below.
fitARMA1 = arima(y, order = c(1,0,1))
fitARMA1

Call:
arima(x = y, order = c(1, 0, 1))

Coefficients:
            ar1       ma1  intercept
      -0.818604  0.896077   0.665177
s.e.   0.088075  0.068410   0.206508

sigma^2 estimated as 20.2452:  log likelihood = -1508.26,  aic = 3024.52
# Residual diagnostics for the ARMA(1,1) fit: sample ACF ...
acf(residuals(fitARMA1),lag.max=20,main="ARMA1")

# ... normal QQ-plot with reference line ...
qqnorm(residuals(fitARMA1),main="ARMA1") ; qqline(residuals(fitARMA1))

# ... and the residual trace itself.
plot(residuals(fitARMA1))

# Roots of the AR polynomial 1 - 1.229 z + 0.233 z^2; the root at
# 1.005 is almost on the unit circle, suggesting near non-stationarity.
polyroot(c(1,-1.229,+0.233))
[1] 1.00525088+0i 4.26942723-0i
data(Mishkin, package = 'Ecdat')
y = as.vector(Mishkin[,1])

# Augmented Dickey-Fuller test (tseries package): H0 = unit root.
# The p-value printed below (0.016) rejects the unit root at 5%.
adf.test(y)

    Augmented Dickey-Fuller Test

data:  y
Dickey-Fuller = -3.865109, Lag order = 7, p-value = 0.0157645
alternative hypothesis: stationary
# Phillips-Perron unit-root test: also rejects H0 (p < 0.01).
pp.test(y)
p-value smaller than printed p-value

    Phillips-Perron Unit Root Test

data:  y
Dickey-Fuller Z(alpha) = -248.7537, Truncation lag parameter = 5, p-value = 0.01
alternative hypothesis: stationary
# KPSS reverses the hypotheses (H0 = level-stationary); p < 0.01 rejects
# stationarity, conflicting with ADF/PP — a common borderline case.
kpss.test(y)
p-value smaller than printed p-value

    KPSS Test for Level Stationarity

data:  y
KPSS Level = 2.510004, Truncation lag parameter = 5, p-value = 0.01

# Let auto.arima() choose the differencing order; AIC selects ARIMA(1,1,1).
auto.arima(y, max.p=5, max.q = 5, ic='aic', trace = FALSE)
Series: y 
ARIMA(1,1,1) 

Coefficients:
           ar1        ma1
      0.238309  -0.877177
s.e.  0.055043   0.026929

sigma^2 estimated as 8.58719:  log likelihood=-1221.62
AIC=2449.25   AICc=2449.29   BIC=2461.83
# BIC selects the same ARIMA(1,1,1).
auto.arima(y, max.p=5, max.q = 5, ic='bic', trace = FALSE)
Series: y 
ARIMA(1,1,1) 

Coefficients:
           ar1        ma1
      0.238309  -0.877177
s.e.  0.055043   0.026929

sigma^2 estimated as 8.58719:  log likelihood=-1221.62
AIC=2449.25   AICc=2449.29   BIC=2461.83
# Fit the selected ARIMA(1,1,1) directly and inspect its residuals.
fitARIMA111= arima(y, c(1,1,1))
par(mfrow=c(1,1))
acf(fitARIMA111$resid)

# Box-Pierce test (default type) at lag 15; fitdf = 2 for the estimated
# AR and MA coefficients (df = 13).
Box.test(fitARIMA111$resid, lag = 15, fitdf =2)

    Box-Pierce test

data:  fitARIMA111$resid
X-squared = 14.44181, df = 13, p-value = 0.343481

# Quarterly housing-starts series from Ecdat, starting 1960.
data(Hstarts, package="Ecdat") 
x = ts(Hstarts[,1], start=1960, frequency=4)

# Seasonal ARIMA(1,1,1)x(1,1,1) with period 4.
fit1 = arima(x, c(1,1,1), seasonal = list(order = c(1,1,1), period = 4))
fit1

Call:
arima(x = x, order = c(1, 1, 1), seasonal = list(order = c(1, 1, 1), period = 4))

Coefficients:
           ar1        ma1       sar1       sma1
      0.688314  -0.881866  -0.155035  -0.765925
s.e.  0.238596   0.187552   0.097431   0.081197

sigma^2 estimated as 0.0257251:  log likelihood = 64.13,  aic = -118.27
# Drop the (insignificant) seasonal AR term: ARIMA(1,1,1)x(0,1,1)[4].
fit2 = arima(x, c(1,1,1), seasonal = list(order = c(0,1,1), period = 4))
fit2

Call:
arima(x = x, order = c(1, 1, 1), seasonal = list(order = c(0, 1, 1), period = 4))

Coefficients:
           ar1        ma1       sma1
      0.674779  -0.890084  -0.822011
s.e.  0.141937   0.104565   0.051223

sigma^2 estimated as 0.0260917:  log likelihood = 62.91,  aic = -117.82
# Ljung-Box on each model's residuals; fitdf counts the estimated
# coefficients (3 for fit2, 4 for fit1).  Neither test rejects.
Box.test(residuals(fit2), type="Ljung-Box", lag = 10, fitdf=3)

    Box-Ljung test

data:  residuals(fit2)
X-squared = 8.449959, df = 7, p-value = 0.294592
Box.test(residuals(fit1), type="Ljung-Box", lag = 10, fitdf=4)

    Box-Ljung test

data:  residuals(fit1)
X-squared = 5.821895, df = 6, p-value = 0.443435

library("fracdiff")
library("longmemo")

# Fractional-differencing parameters to illustrate: d < 0
# (antipersistent) and d > 0 (long memory).
D = c(-.35, .35)

set.seed("09201948")

# One row of plots (series + ACF) per d value, plus a spare row
# used by the d = 0.7 example below.
par(mfrow=c(length(D)+1,2))

for (i in 1:length(D))
{
# simARMA0 (longmemo) simulates fractional Gaussian noise with
# Hurst parameter H = d + 1/2.
H = D[i] + 1/2
x = simARMA0(2500,H)
plot(x,main=toString(paste("d =", D[i])) )
acf(x,main=toString(paste("d =", D[i])) )
}

# Build a d = 0.7 process: simulate d' = 0.7 - 1 = -0.3 fractional noise
# and cumulate it (one integration adds 1 to d).
d= .7-1
H = d + 1/2
y = simARMA0(2500,H)
x = cumsum(y)
plot(x,main="d = 0.7",type="l")
acf(x,main="d = 0.7")


# Fractionally differencing by 0.7 (diffseries, fracdiff) whitens the
# ACF, while an ordinary first difference (d = 1) overdifferences.
par(mfrow=c(1,2))
acf(diffseries(x,.7),main =expression(paste(Delta^0.7, Y)))
acf(diff(x),main = expression(paste(Delta, Y)))

# Simulate an AR(1) mean with GARCH(1,1) errors:
#   y[t]    = mu + phi*(y[t-1] - mu) + a[t],   a[t] = sigma[t]*e[t],
#   sig2[t+1] = omega + alpha*a[t]^2 + beta*sig2[t]
# The first 10000 draws serve as burn-in for the plots that follow.
n = 10500
set.seed("2340")
e = rnorm(n)          # i.i.d. N(0,1) innovations
a = e                 # preallocate the GARCH shocks a[t]
y = e                 # preallocate the observed series y[t]
# BUG FIX: sig2 was the scalar 2^2, so sig2[t] was NA for every t >= 2 and
# the whole recursion filled with NAs.  Preallocate the full variance path
# (length n+1 because the loop writes sig2[t+1]) at the old starting value.
sig2 = rep(2^2, n + 1)
omega = 1
alpha =  0.08
beta = 0.90           # alpha + beta = 0.98 < 1: covariance stationary
phi = 0.8
mu = 0.1

for (t in 2:n)
{
a[t] = sqrt(sig2[t])*e[t]
y[t] = mu + phi*(y[t-1]-mu) + a[t]
sig2[t+1] = omega + alpha*a[t]^2 + beta*sig2[t]
}

# Open a PDF device and lay out a 2x4 grid for the simulation diagnostics.
pdf ("garcho3.pdf", width = 9, height = 6)
#
par(mfrow=c(2,4))

# (a) the raw white-noise innovations, after the 10000-draw burn-in.
plot(e[10001:n], type="l",xlab="t", ylab = expression(epsilon), main="(a) white noise")

# (b) the conditional standard deviation path sigma[t].
# BUG FIX: the expression was passed as `ylim` (numeric axis limits), which
# is why plot.window() raised "invalid 'ylim' value"; it is the y-axis
# *label*, i.e. `ylab`.
plot(sqrt(sig2[10001:n]), type="l",xlab="t",ylab=expression(sigma[t]), main="(b) conditional std dev")
library(xts)
Loading required package: zoo

Attaching package: ‘zoo’

The following objects are masked from ‘package:base’:

    as.Date, as.Date.numeric
library(fGarch)
Loading required package: timeDate
Loading required package: timeSeries

Attaching package: ‘timeSeries’

The following object is masked from ‘package:zoo’:

    time<-

Loading required package: fBasics
# AR(1) mean + GARCH(1,1) variance with Gaussian innovations (fGarch).
# The full nlminb trace and fit summary are printed below.
garchFit(formula = ~arma(1,0) + garch(1, 1), data = bmw, cond.dist = "norm")

Series Initialization:
 ARMA Model:                arma
 Formula Mean:              ~ arma(1, 0)
 GARCH Model:               garch
 Formula Variance:          ~ garch(1, 1)
 ARMA Order:                1 0
 Max ARMA Order:            1
 GARCH Order:               1 1
 Max GARCH Order:           1
 Maximum Order:             1
 Conditional Dist:          norm
 h.start:                   2
 llh.start:                 1
 Length of Series:          6146
 Recursion Init:            mci
 Series Scale:              0.0147555259

Parameter Initialization:
 Initial Parameters:          $params
 Limits of Transformations:   $U, $V
 Which Parameters are Fixed?  $includes
 Parameter Matrix:
 Index List of Parameters to be Optimized:
    mu    ar1  omega alpha1  beta1 
     1      2      3      4      6 
 Persistence:                  0.9 


--- START OF TRACE ---
Selected Algorithm: nlminb 

R coded nlminb Solver: 

  0:     8197.7076: 0.0231375 0.0811175 0.100000 0.100000 0.800000
  1:     8191.4368: 0.0231376 0.0820231 0.0829234 0.101366 0.792310
  2:     8171.2956: 0.0231377 0.0834840 0.0819964 0.118120 0.800662
  3:     8165.8147: 0.0231379 0.0858254 0.0654065 0.126643 0.800937
  4:     8161.3630: 0.0231384 0.0913135 0.0611973 0.134195 0.816703
  5:     8157.6333: 0.0231397 0.0999004 0.0505134 0.126605 0.827092
  6:     8156.0110: 0.0231426 0.104737 0.0482985 0.115063 0.840945
  7:     8155.6452: 0.0231478 0.0932725 0.0422975 0.107529 0.852311
  8:     8155.5196: 0.0231478 0.0933535 0.0431822 0.107833 0.852922
  9:     8155.4436: 0.0231494 0.0943324 0.0430573 0.107351 0.852714
 10:     8155.4195: 0.0231526 0.0961691 0.0435774 0.106424 0.853420
 11:     8155.3582: 0.0231639 0.0995904 0.0416707 0.105718 0.855419
 12:     8155.3059: 0.0232332 0.0979839 0.0420297 0.103245 0.856914
 13:     8155.3026: 0.0232332 0.0979930 0.0420814 0.103311 0.856994
 14:     8155.3008: 0.0232333 0.0980164 0.0419790 0.103351 0.857025
 15:     8155.2995: 0.0232358 0.0980415 0.0419474 0.103372 0.857226
 16:     8155.2958: 0.0232430 0.0980644 0.0417613 0.103189 0.857457
 17:     8155.2923: 0.0232594 0.0981493 0.0415880 0.102942 0.857970
 18:     8155.2896: 0.0233012 0.0984606 0.0414384 0.102753 0.858151
 19:     8155.2873: 0.0233442 0.0985178 0.0414683 0.102771 0.858211
 20:     8155.2833: 0.0234833 0.0978777 0.0412894 0.102594 0.858545
 21:     8155.2571: 0.0249755 0.0992718 0.0413171 0.103253 0.857706
 22:     8155.2462: 0.0256650 0.100021 0.0408549 0.101828 0.860009
 23:     8155.2300: 0.0263554 0.0991767 0.0414259 0.103072 0.858170
 24:     8155.2279: 0.0263554 0.0991727 0.0413643 0.103032 0.858128
 25:     8155.2271: 0.0263556 0.0990221 0.0414014 0.102795 0.858300
 26:     8155.2266: 0.0263708 0.0990310 0.0413480 0.102755 0.858330
 27:     8155.2238: 0.0266534 0.0992512 0.0410613 0.102259 0.859106
 28:     8155.2220: 0.0269367 0.0989338 0.0410123 0.102234 0.859194
 29:     8155.2214: 0.0271533 0.0985704 0.0408774 0.102070 0.859482
 30:     8155.2214: 0.0271811 0.0986000 0.0409012 0.102100 0.859430
 31:     8155.2214: 0.0271709 0.0985960 0.0408969 0.102096 0.859439

Final Estimate of the Negative LLH:
 LLH:  -17757.1604    norm LLH:  -2.88922233 
            mu            ar1          omega         alpha1          beta1 
4.00921646e-04 9.85960084e-02 8.90429955e-06 1.02096121e-01 8.59438881e-01 

R-optimhess Difference Approximated Hessian Matrix:
                    mu             ar1           omega          alpha1           beta1
mu     -4.03227080e+07    -9110.700614 -2.47133587e+08 -1.02365122e+04  1.56766704e+02
ar1    -9.11070061e+03    -4921.397928 -3.17579246e+06 -3.30680113e+02 -8.62839816e+02
omega  -2.47133587e+08 -3175792.459279 -7.19620950e+12 -7.10411367e+08 -1.06585562e+09
alpha1 -1.02365122e+04     -330.680113 -7.10411367e+08 -1.15660301e+05 -1.34997175e+05
beta1   1.56766704e+02     -862.839816 -1.06585562e+09 -1.34997175e+05 -1.81389074e+05
attr(,"time")
Time difference of 0.0703678131 secs

--- END OF TRACE ---


Time to Estimate Parameters:
 Time difference of 0.372771025 secs
Using formula(x) is deprecated when x is a character vector of length > 1.
  Consider formula(paste(x, collapse = " ")) instead.

Title:
 GARCH Modelling 

Call:
 garchFit(formula = ~arma(1, 0) + garch(1, 1), data = bmw, cond.dist = "norm") 

Mean and Variance Equation:
 data ~ arma(1, 0) + garch(1, 1)
<environment: 0x7fe8b7508820>
 [data = bmw]

Conditional Distribution:
 norm 

Coefficient(s):
        mu         ar1       omega      alpha1       beta1  
4.0092e-04  9.8596e-02  8.9043e-06  1.0210e-01  8.5944e-01  

Std. Errors:
 based on Hessian 

Error Analysis:
        Estimate  Std. Error  t value Pr(>|t|)    
mu     4.009e-04   1.579e-04    2.539   0.0111 *  
ar1    9.860e-02   1.431e-02    6.888 5.65e-12 ***
omega  8.904e-06   1.449e-06    6.145 7.97e-10 ***
alpha1 1.021e-01   1.135e-02    8.994  < 2e-16 ***
beta1  8.594e-01   1.581e-02   54.348  < 2e-16 ***
---
Signif. codes:  0 ‘***’ 0.001 ‘**’ 0.01 ‘*’ 0.05 ‘.’ 0.1 ‘ ’ 1

Log Likelihood:
 17757.1604    normalized:  2.88922233 

Description:
 Sun Aug  1 10:17:33 2021 by user:  
# Same fit wrapped in summary() to add standardized-residual tests
# (Jarque-Bera, Ljung-Box, LM ARCH) and information criteria.
summary(garchFit(formula = ~arma(1, 0) + garch(1, 1), data = bmw, cond.dist = "norm"))

Series Initialization:
 ARMA Model:                arma
 Formula Mean:              ~ arma(1, 0)
 GARCH Model:               garch
 Formula Variance:          ~ garch(1, 1)
 ARMA Order:                1 0
 Max ARMA Order:            1
 GARCH Order:               1 1
 Max GARCH Order:           1
 Maximum Order:             1
 Conditional Dist:          norm
 h.start:                   2
 llh.start:                 1
 Length of Series:          6146
 Recursion Init:            mci
 Series Scale:              0.0147555259

Parameter Initialization:
 Initial Parameters:          $params
 Limits of Transformations:   $U, $V
 Which Parameters are Fixed?  $includes
 Parameter Matrix:
 Index List of Parameters to be Optimized:
    mu    ar1  omega alpha1  beta1 
     1      2      3      4      6 
 Persistence:                  0.9 


--- START OF TRACE ---
Selected Algorithm: nlminb 

R coded nlminb Solver: 

  0:     8197.7076: 0.0231375 0.0811175 0.100000 0.100000 0.800000
  1:     8191.4368: 0.0231376 0.0820231 0.0829234 0.101366 0.792310
  2:     8171.2956: 0.0231377 0.0834840 0.0819964 0.118120 0.800662
  3:     8165.8147: 0.0231379 0.0858254 0.0654065 0.126643 0.800937
  4:     8161.3630: 0.0231384 0.0913135 0.0611973 0.134195 0.816703
  5:     8157.6333: 0.0231397 0.0999004 0.0505134 0.126605 0.827092
  6:     8156.0110: 0.0231426 0.104737 0.0482985 0.115063 0.840945
  7:     8155.6452: 0.0231478 0.0932725 0.0422975 0.107529 0.852311
  8:     8155.5196: 0.0231478 0.0933535 0.0431822 0.107833 0.852922
  9:     8155.4436: 0.0231494 0.0943324 0.0430573 0.107351 0.852714
 10:     8155.4195: 0.0231526 0.0961691 0.0435774 0.106424 0.853420
 11:     8155.3582: 0.0231639 0.0995904 0.0416707 0.105718 0.855419
 12:     8155.3059: 0.0232332 0.0979839 0.0420297 0.103245 0.856914
 13:     8155.3026: 0.0232332 0.0979930 0.0420814 0.103311 0.856994
 14:     8155.3008: 0.0232333 0.0980164 0.0419790 0.103351 0.857025
 15:     8155.2995: 0.0232358 0.0980415 0.0419474 0.103372 0.857226
 16:     8155.2958: 0.0232430 0.0980644 0.0417613 0.103189 0.857457
 17:     8155.2923: 0.0232594 0.0981493 0.0415880 0.102942 0.857970
 18:     8155.2896: 0.0233012 0.0984606 0.0414384 0.102753 0.858151
 19:     8155.2873: 0.0233442 0.0985178 0.0414683 0.102771 0.858211
 20:     8155.2833: 0.0234833 0.0978777 0.0412894 0.102594 0.858545
 21:     8155.2571: 0.0249755 0.0992718 0.0413171 0.103253 0.857706
 22:     8155.2462: 0.0256650 0.100021 0.0408549 0.101828 0.860009
 23:     8155.2300: 0.0263554 0.0991767 0.0414259 0.103072 0.858170
 24:     8155.2279: 0.0263554 0.0991727 0.0413643 0.103032 0.858128
 25:     8155.2271: 0.0263556 0.0990221 0.0414014 0.102795 0.858300
 26:     8155.2266: 0.0263708 0.0990310 0.0413480 0.102755 0.858330
 27:     8155.2238: 0.0266534 0.0992512 0.0410613 0.102259 0.859106
 28:     8155.2220: 0.0269367 0.0989338 0.0410123 0.102234 0.859194
 29:     8155.2214: 0.0271533 0.0985704 0.0408774 0.102070 0.859482
 30:     8155.2214: 0.0271811 0.0986000 0.0409012 0.102100 0.859430
 31:     8155.2214: 0.0271709 0.0985960 0.0408969 0.102096 0.859439

Final Estimate of the Negative LLH:
 LLH:  -17757.1604    norm LLH:  -2.88922233 
            mu            ar1          omega         alpha1          beta1 
4.00921646e-04 9.85960084e-02 8.90429955e-06 1.02096121e-01 8.59438881e-01 

R-optimhess Difference Approximated Hessian Matrix:
                    mu             ar1           omega          alpha1           beta1
mu     -4.03227080e+07    -9110.700614 -2.47133587e+08 -1.02365122e+04  1.56766704e+02
ar1    -9.11070061e+03    -4921.397928 -3.17579246e+06 -3.30680113e+02 -8.62839816e+02
omega  -2.47133587e+08 -3175792.459279 -7.19620950e+12 -7.10411367e+08 -1.06585562e+09
alpha1 -1.02365122e+04     -330.680113 -7.10411367e+08 -1.15660301e+05 -1.34997175e+05
beta1   1.56766704e+02     -862.839816 -1.06585562e+09 -1.34997175e+05 -1.81389074e+05
attr(,"time")
Time difference of 0.067045927 secs

--- END OF TRACE ---


Time to Estimate Parameters:
 Time difference of 0.349786997 secs
Using formula(x) is deprecated when x is a character vector of length > 1.
  Consider formula(paste(x, collapse = " ")) instead.

Title:
 GARCH Modelling 

Call:
 garchFit(formula = ~arma(1, 0) + garch(1, 1), data = bmw, cond.dist = "norm") 

Mean and Variance Equation:
 data ~ arma(1, 0) + garch(1, 1)
<environment: 0x7fe8d64ecea0>
 [data = bmw]

Conditional Distribution:
 norm 

Coefficient(s):
        mu         ar1       omega      alpha1       beta1  
4.0092e-04  9.8596e-02  8.9043e-06  1.0210e-01  8.5944e-01  

Std. Errors:
 based on Hessian 

Error Analysis:
        Estimate  Std. Error  t value Pr(>|t|)    
mu     4.009e-04   1.579e-04    2.539   0.0111 *  
ar1    9.860e-02   1.431e-02    6.888 5.65e-12 ***
omega  8.904e-06   1.449e-06    6.145 7.97e-10 ***
alpha1 1.021e-01   1.135e-02    8.994  < 2e-16 ***
beta1  8.594e-01   1.581e-02   54.348  < 2e-16 ***
---
Signif. codes:  0 ‘***’ 0.001 ‘**’ 0.01 ‘*’ 0.05 ‘.’ 0.1 ‘ ’ 1

Log Likelihood:
 17757.1604    normalized:  2.88922233 

Description:
 Sun Aug  1 10:17:34 2021 by user:  


Standardised Residuals Tests:
                                Statistic  p-Value     
 Jarque-Bera Test   R    Chi^2  11377.9945 0           
 Shapiro-Wilk Test  R    W      NA         NA          
 Ljung-Box Test     R    Q(10)  15.1569254 0.126444518 
 Ljung-Box Test     R    Q(15)  20.0934511 0.168377031 
 Ljung-Box Test     R    Q(20)  30.5478816 0.0614491696
 Ljung-Box Test     R^2  Q(10)  5.03271737 0.88898177  
 Ljung-Box Test     R^2  Q(15)  7.53927234 0.940924545 
 Ljung-Box Test     R^2  Q(20)  9.27722902 0.979468118 
 LM Arch Test       R    TR^2   6.0325397  0.914432863 

Information Criterion Statistics:
        AIC         BIC         SIC        HQIC 
-5.77681758 -5.77134772 -5.77681890 -5.77492037 
library(xts)
library(rugarch)
Loading required package: parallel

Attaching package: ‘rugarch’

The following object is masked from ‘package:stats’:

    sigma
data(bmw, package="evir")
# Same AR(1)+GARCH(1,1) model, specified and fitted with rugarch instead.
arma.garch.norm = ugarchspec(mean.model=list(armaOrder=c(1,0)), variance.model=list(garchOrder=c(1,1)))
bmw.garch.norm = ugarchfit(data=bmw, spec=arma.garch.norm)
show(bmw.garch.norm)

*---------------------------------*
*          GARCH Model Fit        *
*---------------------------------*

Conditional Variance Dynamics   
-----------------------------------
GARCH Model : sGARCH(1,1)
Mean Model  : ARFIMA(1,0,0)
Distribution    : norm 

Optimal Parameters
------------------------------------
        Estimate  Std. Error  t value Pr(>|t|)
mu      0.000453    0.000175   2.5937 0.009495
ar1     0.098133    0.014261   6.8811 0.000000
omega   0.000009    0.000000  23.0627 0.000000
alpha1  0.099405    0.005593  17.7730 0.000000
beta1   0.863666    0.006283 137.4528 0.000000

Robust Standard Errors:
        Estimate  Std. Error  t value Pr(>|t|)
mu      0.000453    0.000186   2.4380  0.01477
ar1     0.098133    0.014073   6.9732  0.00000
omega   0.000009    0.000001  13.4483  0.00000
alpha1  0.099405    0.007903  12.5785  0.00000
beta1   0.863666    0.010529  82.0234  0.00000

LogLikelihood : 17751.9289 

Information Criteria
------------------------------------
                    
Akaike       -5.7751
Bayes        -5.7696
Shibata      -5.7751
Hannan-Quinn -5.7732

Weighted Ljung-Box Test on Standardized Residuals
------------------------------------
                        statistic p-value
Lag[1]                     0.7790  0.3775
Lag[2*(p+q)+(p+q)-1][2]    0.9161  0.7890
Lag[4*(p+q)+(p+q)-1][5]    3.3274  0.3536
d.o.f=1
H0 : No serial correlation

Weighted Ljung-Box Test on Standardized Squared Residuals
------------------------------------
                        statistic p-value
Lag[1]                     0.2769  0.5988
Lag[2*(p+q)+(p+q)-1][5]    1.0262  0.8537
Lag[4*(p+q)+(p+q)-1][9]    1.7209  0.9356
d.o.f=2

Weighted ARCH LM Tests
------------------------------------
            Statistic Shape Scale P-Value
ARCH Lag[3]    0.1922 0.500 2.000  0.6611
ARCH Lag[5]    1.1094 1.440 1.667  0.7008
ARCH Lag[7]    1.2290 2.315 1.543  0.8737

Nyblom stability test
------------------------------------
Joint Statistic:  38.6063
Individual Statistics:              
mu     0.05867
ar1    0.21574
omega  5.66190
alpha1 0.18834
beta1  0.18245

Asymptotic Critical Values (10% 5% 1%)
Joint Statistic:         1.28 1.47 1.88
Individual Statistic:    0.35 0.47 0.75

Sign Bias Test
------------------------------------


Adjusted Pearson Goodness-of-Fit Test:
------------------------------------
  group statistic p-value(g-1)
1    20     493.2    1.477e-92
2    30     513.4    4.975e-90
3    40     559.3    2.484e-93
4    50     585.5    5.611e-93


Elapsed time : 0.687120914 
# Draw the full panel of rugarch diagnostic plots.
plot(bmw.garch.norm, which="all")

please wait...calculating quantiles...
# Draw six selected rugarch diagnostic panels on a 3x2 grid.
par(mfrow = c(3, 2))

for (panel in c(1, 3, 10, 11, 8, 9)) {
  plot(bmw.garch.norm, which = panel)
}

library(MASS)
# Refit with fGarch, overwriting the rugarch object of the same name, so
# that fGarch's residuals() method is the one used in the next chunk.
bmw.garch.norm <- garchFit(formula = ~arma(1, 0) + garch(1, 1), data = bmw, cond.dist = "norm")

Series Initialization:
 ARMA Model:                arma
 Formula Mean:              ~ arma(1, 0)
 GARCH Model:               garch
 Formula Variance:          ~ garch(1, 1)
 ARMA Order:                1 0
 Max ARMA Order:            1
 GARCH Order:               1 1
 Max GARCH Order:           1
 Maximum Order:             1
 Conditional Dist:          norm
 h.start:                   2
 llh.start:                 1
 Length of Series:          6146
 Recursion Init:            mci
 Series Scale:              0.0147555259

Parameter Initialization:
 Initial Parameters:          $params
 Limits of Transformations:   $U, $V
 Which Parameters are Fixed?  $includes
 Parameter Matrix:
 Index List of Parameters to be Optimized:
    mu    ar1  omega alpha1  beta1 
     1      2      3      4      6 
 Persistence:                  0.9 


--- START OF TRACE ---
Selected Algorithm: nlminb 

R coded nlminb Solver: 

  0:     8197.7076: 0.0231375 0.0811175 0.100000 0.100000 0.800000
  1:     8191.4368: 0.0231376 0.0820231 0.0829234 0.101366 0.792310
  2:     8171.2956: 0.0231377 0.0834840 0.0819964 0.118120 0.800662
  3:     8165.8147: 0.0231379 0.0858254 0.0654065 0.126643 0.800937
  4:     8161.3630: 0.0231384 0.0913135 0.0611973 0.134195 0.816703
  5:     8157.6333: 0.0231397 0.0999004 0.0505134 0.126605 0.827092
  6:     8156.0110: 0.0231426 0.104737 0.0482985 0.115063 0.840945
  7:     8155.6452: 0.0231478 0.0932725 0.0422975 0.107529 0.852311
  8:     8155.5196: 0.0231478 0.0933535 0.0431822 0.107833 0.852922
  9:     8155.4436: 0.0231494 0.0943324 0.0430573 0.107351 0.852714
 10:     8155.4195: 0.0231526 0.0961691 0.0435774 0.106424 0.853420
 11:     8155.3582: 0.0231639 0.0995904 0.0416707 0.105718 0.855419
 12:     8155.3059: 0.0232332 0.0979839 0.0420297 0.103245 0.856914
 13:     8155.3026: 0.0232332 0.0979930 0.0420814 0.103311 0.856994
 14:     8155.3008: 0.0232333 0.0980164 0.0419790 0.103351 0.857025
 15:     8155.2995: 0.0232358 0.0980415 0.0419474 0.103372 0.857226
 16:     8155.2958: 0.0232430 0.0980644 0.0417613 0.103189 0.857457
 17:     8155.2923: 0.0232594 0.0981493 0.0415880 0.102942 0.857970
 18:     8155.2896: 0.0233012 0.0984606 0.0414384 0.102753 0.858151
 19:     8155.2873: 0.0233442 0.0985178 0.0414683 0.102771 0.858211
 20:     8155.2833: 0.0234833 0.0978777 0.0412894 0.102594 0.858545
 21:     8155.2571: 0.0249755 0.0992718 0.0413171 0.103253 0.857706
 22:     8155.2462: 0.0256650 0.100021 0.0408549 0.101828 0.860009
 23:     8155.2300: 0.0263554 0.0991767 0.0414259 0.103072 0.858170
 24:     8155.2279: 0.0263554 0.0991727 0.0413643 0.103032 0.858128
 25:     8155.2271: 0.0263556 0.0990221 0.0414014 0.102795 0.858300
 26:     8155.2266: 0.0263708 0.0990310 0.0413480 0.102755 0.858330
 27:     8155.2238: 0.0266534 0.0992512 0.0410613 0.102259 0.859106
 28:     8155.2220: 0.0269367 0.0989338 0.0410123 0.102234 0.859194
 29:     8155.2214: 0.0271533 0.0985704 0.0408774 0.102070 0.859482
 30:     8155.2214: 0.0271811 0.0986000 0.0409012 0.102100 0.859430
 31:     8155.2214: 0.0271709 0.0985960 0.0408969 0.102096 0.859439

Final Estimate of the Negative LLH:
 LLH:  -17757.1604    norm LLH:  -2.88922233 
            mu            ar1          omega         alpha1          beta1 
4.00921646e-04 9.85960084e-02 8.90429955e-06 1.02096121e-01 8.59438881e-01 

R-optimhess Difference Approximated Hessian Matrix:
                    mu             ar1           omega          alpha1           beta1
mu     -4.03227080e+07    -9110.700614 -2.47133587e+08 -1.02365122e+04  1.56766704e+02
ar1    -9.11070061e+03    -4921.397928 -3.17579246e+06 -3.30680113e+02 -8.62839816e+02
omega  -2.47133587e+08 -3175792.459279 -7.19620950e+12 -7.10411367e+08 -1.06585562e+09
alpha1 -1.02365122e+04     -330.680113 -7.10411367e+08 -1.15660301e+05 -1.34997175e+05
beta1   1.56766704e+02     -862.839816 -1.06585562e+09 -1.34997175e+05 -1.81389074e+05
attr(,"time")
Time difference of 0.065267086 secs

--- END OF TRACE ---


Time to Estimate Parameters:
 Time difference of 0.342492104 secs
Using formula(x) is deprecated when x is a character vector of length > 1.
  Consider formula(paste(x, collapse = " ")) instead.
# Extract the standardized residuals of the AR(1)+GARCH(1,1) fit and
# check their distribution against t and normal references.
e = residuals(bmw.garch.norm, standardize=TRUE)
# MLE of a t location/scale/df model for the residuals (MASS::fitdistr);
# the fitted df ~ 4.1 indicates heavy tails.  Original output:
#         m               s               df
#   -0.0239865051    0.7263501855    4.1097516314
#  ( 0.0109243437) ( 0.0121327797) ( 0.2357963744)
# (The "NaNs produced" warnings come from the optimizer stepping into
# invalid parameter values and are harmless here.)
fitdistr(e,"t")
n = length(e)
grid = (1:n)/(n+1)    # plotting positions i/(n+1)
# QQ-plot of the residuals against t(4) quantiles.
qqplot(sort(as.numeric(e)), qt(grid,df=4), main="t-plot, df=4",xlab= "Standardized residual quantiles",ylab="t-quantiles")

qqnorm(sort(as.numeric(e)))
# BUG FIX: the reference line must be fitted to the data shown in the
# QQ-plot (the standardized residuals e), not the stale global `y`,
# which at this point holds the GARCH series simulated earlier.
qqline(as.numeric(e), col = 2)

# Richer model: ARMA(1,1) mean with GARCH(1,1) variance and Student-t
# innovations ("std"); trace and estimates follow.
garchFit(formula = ~arma(1, 1) + garch(1, 1), data = bmw,
cond.dist = "std")

Series Initialization:
 ARMA Model:                arma
 Formula Mean:              ~ arma(1, 1)
 GARCH Model:               garch
 Formula Variance:          ~ garch(1, 1)
 ARMA Order:                1 1
 Max ARMA Order:            1
 GARCH Order:               1 1
 Max GARCH Order:           1
 Maximum Order:             1
 Conditional Dist:          std
 h.start:                   2
 llh.start:                 1
 Length of Series:          6146
 Recursion Init:            mci
 Series Scale:              0.0147555259

Parameter Initialization:
 Initial Parameters:          $params
 Limits of Transformations:   $U, $V
 Which Parameters are Fixed?  $includes
 Parameter Matrix:
 Index List of Parameters to be Optimized:
    mu    ar1    ma1  omega alpha1  beta1  shape 
     1      2      3      4      5      7     10 
 Persistence:                  0.9 


--- START OF TRACE ---
Selected Algorithm: nlminb 

R coded nlminb Solver: 

  0:     7788.2143: 0.0231079 -0.247788 0.331458 0.100000 0.100000 0.800000  4.00000
  1:     7780.8363: 0.0231063 -0.250241 0.329122 0.0726877 0.115229 0.795726  3.99979
  2:     7768.7866: 0.0231043 -0.252470 0.327035 0.0722958 0.142456 0.811750  4.00011
  3:     7764.5716: 0.0231000 -0.254259 0.325523 0.0449827 0.158395 0.813199  3.99993
  4:     7758.0833: 0.0230808 -0.251634 0.329272 0.0459098 0.136742 0.835893  3.99879
  5:     7756.4790: 0.0230660 -0.259996 0.321726 0.0413723 0.112864 0.852912  3.99854
  6:     7755.9517: 0.0230500 -0.259088 0.323686 0.0312476 0.103218 0.881320  3.99858
  7:     7755.2243: 0.0230443 -0.255528 0.327670 0.0250334 0.0966100 0.884447  3.99868
  8:     7754.1836: 0.0230340 -0.251103 0.333020 0.0318698 0.0980444 0.879602  3.99876
  9:     7753.8694: 0.0230211 -0.258499 0.326588 0.0319674 0.101967 0.876632  3.99903
 10:     7753.8184: 0.0230210 -0.258450 0.326647 0.0315765 0.101741 0.876425  3.99902
 11:     7753.7758: 0.0230190 -0.257612 0.327650 0.0313323 0.101204 0.877826  3.99906
 12:     7753.6748: 0.0230090 -0.258208 0.327841 0.0293620 0.0994561 0.880746  3.99932
 13:     7753.6301: 0.0229765 -0.259765 0.328971 0.0288415 0.0978123 0.883304  4.00028
 14:     7753.5767: 0.0229281 -0.261830 0.330867 0.0289110 0.0965673 0.883349  4.00179
 15:     7753.5580: 0.0228758 -0.263897 0.332903 0.0291444 0.0962478 0.883663  4.00331
 16:     7753.4854: 0.0225334 -0.277016 0.345599 0.0274260 0.0950560 0.886480  4.01294
 17:     7753.4719: 0.0225323 -0.276695 0.345993 0.0279023 0.0940099 0.885988  4.01295
 18:     7753.4538: 0.0225135 -0.277276 0.346610 0.0282692 0.0941910 0.886249  4.01341
 19:     7753.4416: 0.0224933 -0.277896 0.347251 0.0281355 0.0941204 0.886129  4.01389
 20:     7753.4320: 0.0224549 -0.278505 0.349027 0.0282643 0.0937839 0.886357  4.01479
 21:     7753.3038: 0.0213907 -0.311227 0.381075 0.0282387 0.0939363 0.885763  4.03989
 22:     7753.2470: 0.0198191 -0.306779 0.375083 0.0267061 0.0894777 0.890618  4.05075
 23:     7753.1676: 0.0182480 -0.296072 0.364994 0.0269527 0.0904575 0.889971  4.05294
 24:     7753.1165: 0.0166676 -0.287184 0.356580 0.0268598 0.0911759 0.888953  4.04888
 25:     7753.0720: 0.0153164 -0.292789 0.362100 0.0272593 0.0919992 0.888716  4.01193
 26:     7753.0464: 0.0140359 -0.279643 0.349665 0.0276292 0.0924361 0.887756  4.04999
 27:     7753.0121: 0.0124586 -0.288551 0.358749 0.0278506 0.0928403 0.887006  4.04515
 28:     7753.0062: 0.0118041 -0.301139 0.371422 0.0277004 0.0928737 0.887052  4.04617
 29:     7753.0059: 0.0117413 -0.298655 0.368918 0.0278383 0.0929380 0.886806  4.04612
 30:     7753.0059: 0.0117547 -0.298461 0.368732 0.0277939 0.0929303 0.886877  4.04609
 31:     7753.0059: 0.0117695 -0.298774 0.369043 0.0277985 0.0929202 0.886878  4.04618
 32:     7753.0059: 0.0117636 -0.298690 0.368959 0.0277991 0.0929239 0.886874  4.04615

Final Estimate of the Negative LLH:
 LLH:  -18159.376    norm LLH:  -2.9546658 
             mu             ar1             ma1           omega          alpha1           beta1 
 1.73577740e-04 -2.98690367e-01  3.68959354e-01  6.05257206e-06  9.29239158e-02  8.86874472e-01 
          shape 
 4.04614910e+00 

R-optimhess Difference Approximated Hessian Matrix:
                    mu             ar1             ma1           omega          alpha1           beta1
mu     -29626609.54489   -9180.1325271   -5076.0398620  3.83282635e+08  4.36255687e+04  8.02938733e+04
ar1        -9180.13253   -7054.9901407   -7154.4458092  5.81740750e+06  6.97662687e+02  5.41664057e+02
ma1        -5076.03986   -7154.4458092   -7311.3552705  5.58039961e+06  6.38678510e+02  4.84689897e+02
omega  383282634.81092 5817407.5014527 5580399.6067520 -6.20511367e+12 -6.12244683e+08 -9.21076625e+08
alpha1     43625.56871     697.6626869     638.6785101 -6.12244683e+08 -9.56590330e+04 -1.17183443e+05
beta1      80293.87334     541.6640568     484.6898973 -9.21076625e+08 -1.17183443e+05 -1.60599870e+05
shape       2366.26467      37.4272552      35.3858391 -1.30460896e+07 -1.76644179e+03 -2.28726044e+03
                 shape
mu      2.36626467e+03
ar1     3.74272552e+01
ma1     3.53858391e+01
omega  -1.30460896e+07
alpha1 -1.76644179e+03
beta1  -2.28726044e+03
shape  -5.27928019e+01
attr(,"time")
Time difference of 0.194895983 secs

--- END OF TRACE ---


Time to Estimate Parameters:
 Time difference of 0.693755865 secs
Using formula(x) is deprecated when x is a character vector of length > 1.
  Consider formula(paste(x, collapse = " ")) instead.

Title:
 GARCH Modelling 

Call:
 garchFit(formula = ~arma(1, 1) + garch(1, 1), data = bmw, cond.dist = "std") 

Mean and Variance Equation:
 data ~ arma(1, 1) + garch(1, 1)
<environment: 0x7fe907230070>
 [data = bmw]

Conditional Distribution:
 std 

Coefficient(s):
         mu          ar1          ma1        omega       alpha1        beta1        shape  
 1.7358e-04  -2.9869e-01   3.6896e-01   6.0526e-06   9.2924e-02   8.8687e-01   4.0461e+00  

Std. Errors:
 based on Hessian 

Error Analysis:
         Estimate  Std. Error  t value Pr(>|t|)    
mu      1.736e-04   1.855e-04    0.936  0.34929    
ar1    -2.987e-01   1.370e-01   -2.180  0.02924 *  
ma1     3.690e-01   1.345e-01    2.743  0.00608 ** 
omega   6.053e-06   1.344e-06    4.502 6.72e-06 ***
alpha1  9.292e-02   1.312e-02    7.080 1.44e-12 ***
beta1   8.869e-01   1.542e-02   57.528  < 2e-16 ***
shape   4.046e+00   2.315e-01   17.481  < 2e-16 ***
---
Signif. codes:  0 ‘***’ 0.001 ‘**’ 0.01 ‘*’ 0.05 ‘.’ 0.1 ‘ ’ 1

Log Likelihood:
 18159.376    normalized:  2.9546658 

Description:
 Sun Aug  1 10:22:10 2021 by user:  
ma2 <- arima.sim(n = 1000, list(ma = c(-0.2279, 0.2488)))
par(mfrow=c(2,1))
plot(ma2)
acf(ma2)

ar1 <- arima.sim(n = 1000, list(ar = c(0.9))) 
par(mfrow=c(2,1)) 
plot(ar1) 
acf(ar1)

ar5 <- arima.sim(n = 1000, list(ar = c(-0.2279, 0.2488, -0.5, 0.2, 0.6)))
par(mfrow=c(2,1))
plot(ar5)
acf(ar5)

library(prophet) 
library(TSstudio)

model <- tbats(elecequip, use.box.cox = TRUE, use.damped.trend = F,start.p=2,max.p=2,start.q=1,max.q=1)
plot(forecast(model, h=12))

library(fpp2)
── Attaching packages ────────────────────────────────────────────────────────────────────── fpp2 2.4 ──
✓ ggplot2   3.3.3     ✓ expsmooth 2.3  
✓ fma       2.4       
plot(elecequip)


data_points = 1000 
X <- cumsum(rnorm(data_points, 0, 1)) 
Y <- 2*X + 10 + rnorm(data_points, 0, 1)

par(mfrow=c(2,2))
plot(X, main="Time Series X")
plot(diff(X), main="Time Series of First Differences of X") 
plot(Y, main="Time Series Y") 
plot(diff(Y), main="Time Series of First Differences of Y")

adf.test(X, k=1)

    Augmented Dickey-Fuller Test

data:  X
Dickey-Fuller = -2.304394, Lag order = 1, p-value = 0.44946
alternative hypothesis: stationary
adf.test(diff(X), k=0)
p-value smaller than printed p-value

    Augmented Dickey-Fuller Test

data:  diff(X)
Dickey-Fuller = -30.42674, Lag order = 0, p-value = 0.01
alternative hypothesis: stationary
adf.test(Y, k=1)

    Augmented Dickey-Fuller Test

data:  Y
Dickey-Fuller = -2.33245, Lag order = 1, p-value = 0.437584
alternative hypothesis: stationary
adf.test(diff(Y), k=0)
p-value smaller than printed p-value

    Augmented Dickey-Fuller Test

data:  diff(Y)
Dickey-Fuller = -35.79313, Lag order = 0, p-value = 0.01
alternative hypothesis: stationary
lm(Y~X)

Call:
lm(formula = Y ~ X)

Coefficients:
(Intercept)            X  
    9.99348      2.00063  
spread = Y - 10.0009 - 1.997X
Error: unexpected symbol in "spread = Y - 10.0009 - 1.997X"

library(ggplot2) 
data(economics) 
lmMod <- lm(pce ~ pop, data=economics) 
lmMod

Call:
lm(formula = pce ~ pop, data = economics)

Coefficients:
 (Intercept)           pop  
-1.97965e+04   9.57251e-02  
acf(lmMod$residuals) # highly autocorrelated from the picture.


lmtest::dwtest(lmMod)

    Durbin-Watson test

data:  lmMod
DW = 0.002159001, p-value < 2.22e-16
alternative hypothesis: true autocorrelation is greater than 0
scatter.smooth(x=cars$speed, y=cars$dist, main="Dist ~ Speed")


linearMod <- lm(dist ~ speed, data=cars)
linearMod

Call:
lm(formula = dist ~ speed, data = cars)

Coefficients:
(Intercept)        speed  
  -17.57909      3.93241  
# Create Training and Test data - 
set.seed(100) # setting seed to reproduce results of random sampling 
trainingRowIndex <- sample(1:nrow(cars), 0.8*nrow(cars)) # row indices for training data 
trainingData <- cars[trainingRowIndex, ] # model training data 
testData <- cars[-trainingRowIndex, ] # test data
# Build the model on training data - 
lmMod <- lm(dist ~ speed, data=trainingData) # build the model 
distPred <- predict(lmMod, testData) # predict distance 
actuals_preds <- data.frame(cbind(actuals=testData$dist, predicteds=distPred)) # make actuals_predicteds dataframe 

mean((actuals_preds$predicteds - actuals_preds$actuals)^2)
[1] 267.000242
library("ggplot2") 
library("tidyverse")

data(cars)
head(cars)
plot(cars)

summary(cars)
     speed           dist       
 Min.   : 4.0   Min.   :  2.00  
 1st Qu.:12.0   1st Qu.: 26.00  
 Median :15.0   Median : 36.00  
 Mean   :15.4   Mean   : 42.98  
 3rd Qu.:19.0   3rd Qu.: 56.00  
 Max.   :25.0   Max.   :120.00  
boxplot(cars)


cars.loess <- select(cars, speed, dist)
cars.Train <- cars.loess[1:35,] 
cars.Test <- cars.loess[36:50,]

cars.LoessModel <- loess( dist ~ speed, cars.Train, control = loess.control(surface = "direct") )
cars.LoessModel
Call:
loess(formula = dist ~ speed, data = cars.Train, control = loess.control(surface = "direct"))

Number of Observations: 35 
Equivalent Number of Parameters: 4.82 
Residual Standard Error: 13.8708 
forwardSeq <- as.vector(cars.Test$speed)
forwardSeq
 [1] 19 19 19 20 20 20 20 20 22 23 24 24 24 24 25
cars.Predicted <- predict(cars.LoessModel, data.frame(speed=forwardSeq), se=TRUE )
cars.Predicted
$fit
          1           2           3           4           5           6           7           8           9 
 79.7288965  79.7288965  79.7288965 106.5018421 106.5018421 106.5018421 106.5018421 106.5018421 183.8314219 
         10          11          12          13          14          15 
234.6318396 293.6078199 293.6078199 293.6078199 293.6078199 360.7953680 

$se.fit
          1           2           3           4           5           6           7           8           9 
 12.4673442  12.4673442  12.4673442  21.9702630  21.9702630  21.9702630  21.9702630  21.9702630  50.5278805 
         10          11          12          13          14          15 
 69.6316220  92.0038632  92.0038632  92.0038632  92.0038632 117.6754780 

$residual.scale
[1] 13.8707798

$df
[1] 29.5547794
cars.Test$predicted_dist <- cars.Predicted$fit
cars.Test
cars.TestPlot <- select(cars.Test, speed, predicted_dist) 
cars.TestPlot <- rename(cars.TestPlot, dist = predicted_dist)

ggplot(cars.Train, aes(x = speed, y = dist)) + geom_point() + 
  geom_smooth(stat="smooth", position = "identity", method = "loess", formula = y ~ x, se = TRUE, na.rm = FALSE, inherit.aes = TRUE)+ geom_line(data = cars.TestPlot, aes(y=dist, x=speed, colour="#000099"), show.legend = FALSE, size = 2)

ggplot(cars.loess, aes(x = speed, y = dist)) + geom_point() + geom_smooth( stat="smooth", position = "identity", method = "loess", formula = y ~ x, se = TRUE, na.rm = FALSE, inherit.aes = TRUE ) + geom_line(data = cars.TestPlot, aes(y=dist, x=speed, colour="#000099"), show.legend = FALSE, size = 2)


## 70% of the sample size
smp_size <- floor(0.70 * nrow(cars.loess))
## set the seed to make your partition reproducible 
set.seed(123) 
train_ind <- sample(seq_len(nrow(cars.loess)), size = smp_size) 
cars.Train <- cars.loess[train_ind, ] 
cars.Test <- cars.loess[-train_ind, ]

cars.LoessModel <- loess(dist ~ speed, cars.Train, control = loess.control(surface = "direct"))
forwardSeq <- as.vector(cars.Test$speed)
cars.Predicted <- predict(cars.LoessModel, data.frame(speed=forwardSeq), se=TRUE) 
cars.Test$predicted_dist <- cars.Predicted$fit 
cars.TestPlot <- select(cars.Test, speed, predicted_dist) 
cars.TestPlot <- rename(cars.TestPlot, dist = predicted_dist)

ggplot(cars.loess, aes(x = speed, y = dist)) + geom_point() + geom_smooth( stat="smooth", position = "identity", method = "loess", formula = y ~ x, se = TRUE, na.rm = FALSE, inherit.aes = TRUE ) + geom_line(data = cars.TestPlot, aes(y=dist, x=speed, colour="#000099"), show.legend = FALSE, size = 2)

library(fpp2)
plot(elecequip)

library(forecast)
par(mfrow=c(2,1))

model <- naive(elecequip)
plot(predict(model, n.ahead=24))
model <- snaive(elecequip)
plot(predict(model, n.ahead=24))

plot(stl(elecequip, t.window=13, s.window="periodic", robust=TRUE))
par(mfrow=c(1,1))

model <- stl(elecequip, t.window=13, s.window="periodic", robust=TRUE)
plot(forecast(model, method = "naive", h=12))

plot(decompose(elecequip))

model <- ets(elecequip) 
plot(forecast(model,h=12))

model <- auto.arima(elecequip) 
model
Series: elecequip 
ARIMA(4,0,1)(0,1,1)[12] 

Coefficients:
           ar1        ar2       ar3        ar4        ma1       sma1
      1.162498  -0.029398  0.223521  -0.383994  -0.595393  -0.810746
s.e.  0.146681   0.140929  0.120976   0.070799   0.156950   0.078211

sigma^2 estimated as 10.9033:  log likelihood=-482.97
AIC=979.93   AICc=980.57   BIC=1002.4
plot(forecast(model,h=12)) 

forecast(arima(elecequip, order=c(4,0,1), seasonal = c(0,1,1)), h=12)
NA
model <- tbats(elecequip, use.box.cox = TRUE, use.damped.trend = F,start.p=2,max.p=2,start.q=1,max.q=1)
plot(forecast(model, h=12))

library(prophet) 
library(TSstudio)
df = ts_to_prophet(elecequip) 
next_time_step = function(y, is_end = F){ 
  if(is_end){ new_start = y }
  else{ new_start = end(y) } 
  if(new_start[2]==12){ 
    new_start[1]=new_start[1]+1 new_start[2]=1 }else{ new_start[2]=new_start[2]+1 } return(new_start)}
---
title: "R Notebook"
output: html_notebook
---

This is an [R Markdown](http://rmarkdown.rstudio.com) Notebook. When you execute code within the notebook, the results appear beneath the code. 

Try executing this chunk by clicking the *Run* button within the chunk or by placing your cursor inside it and pressing *Cmd+Shift+Enter*.


Add a new chunk by clicking the *Insert Chunk* button on the toolbar or by pressing *Cmd+Option+I*.

When you save the notebook, an HTML file containing the code and output will be saved alongside it (click the *Preview* button or press *Cmd+Shift+K* to preview the HTML file). 

The preview shows you a rendered HTML copy of the contents of the editor. Consequently, unlike *Knit*, *Preview* does not run any R code chunks. Instead, the output of the chunk when it was last run in the editor is displayed.

```{r}
# Classical decomposition of the monthly AirPassengers series (trend,
# seasonal, random) under a multiplicative model, plus the raw series.
plot(decompose(AirPassengers, type = "multiplicative"))
class(AirPassengers)
plot(AirPassengers)

```

```{r}
# Quarterly beer production: read the raw numbers, stamp them as a
# frequency-4 time series, and decompose with loess (stl).
# NOTE(review): absolute path — breaks on any other machine; consider a
# path relative to the project root.
beerprod <- scan("/Users/ansonleung/Desktop/Applied Financial Engineering/Financial Times Series Analysis/Lecture 1/R Code/beerprod.dat")
beerprod = ts(beerprod, freq = 4)
plot(stl(beerprod, "periodic"))
```
```{r}

# Centered moving-average trend; the weights 1/8,1/4,1/4,1/4,1/8 span a
# full year of quarterly data.  This is stats::filter — dplyr is not
# attached at this point in the notebook.
trendpattern = filter(beerprod, filter = c(1/8, 1/4, 1/4, 1/4, 1/8), sides=2)
plot(beerprod, type= "b", main = "moving average annual trend")
lines(trendpattern)
```
```{r}
# Detrended series = original minus the moving-average trend; what is
# left is dominated by the seasonal pattern.
seasonals = beerprod - trendpattern
plot(seasonals, type = "b", main = "Seasonal pattern for beer production")

```
```{r}
# One-sided (trailing) four-quarter moving average, for comparison with
# the centered version above.
trendpattern2 = filter(beerprod, filter = c(1/4, 1/4, 1/4, 1/4), sides=1)
plot(beerprod, type= "b", main = "moving average annual trend")
lines(trendpattern2)
```
```{r}

# BMW daily returns (evir package): Ljung-Box test for serial
# correlation over the first 5 lags.
options(digits = 9)
data(bmw, package = "evir")
Box.test(bmw, lag = 5, type = "Ljung-Box")


```
```{r}
# AR(1) fit to the BMW returns, then Ljung-Box tests on the residuals at
# several lags; fitdf = 1 adjusts the degrees of freedom for the single
# estimated AR coefficient.
arima(x=bmw, order=c(1,0,0))

fitAR1 = arima(bmw, order = c(1,0,0))
Box.test(residuals(fitAR1), lag = 10, type = "Ljung-Box", fitdf = 1)


Box.test(residuals(fitAR1), lag = 15, type = "Ljung-Box", fitdf = 1)
Box.test(residuals(fitAR1), lag = 20, type = "Ljung-Box", fitdf = 1)
```

```{r}
# Mishkin inflation data (Ecdat): sample ACF/PACF of the first column.
data(Mishkin, package = "Ecdat")

# NOTE(review): as.ts() ignores start/frequency arguments — those are
# honoured only by ts().  If Mishkin[,1] is not already a monthly ts,
# the intended call is ts(Mishkin[,1], start = c(1950, 1),
# frequency = 12); confirm before changing.
x = as.ts(Mishkin[,1], start=1950, frequency=12)
acf(x)
pacf(x)

```
```{r}

# Automatic AR order selection on the differenced series, by AIC and by
# BIC (MA part suppressed via max.q = 0).
# NOTE(review): auto.arima() comes from the forecast package, which no
# earlier chunk attaches — make sure library(forecast) has been run.
auto.arima(diff(x), max.p=20, max.q=0,ic="aic")
auto.arima(diff(x), max.p=10, max.q=0,ic="bic")
```

```{r}
# MA(3) and MA(2) fits to the differenced inflation series, each with
# Ljung-Box residual checks; fitdf equals the number of estimated MA
# coefficients in each case.
data(Mishkin, package = "Ecdat")

y = as.vector(Mishkin[,1])
fitMA3 = arima(diff(y), order = c(0,0,3))
fitMA3 
Box.test(fitMA3$residuals, lag = 5, type = "Ljung", fitdf = 3)
Box.test(fitMA3$residuals, lag = 10, type = "Ljung", fitdf = 3)
Box.test(fitMA3$residuals, lag = 15, type = "Ljung", fitdf = 3)
Box.test(fitMA3$residuals, lag = 20, type = "Ljung", fitdf = 3)

fitMA2 = arima(diff(y), order = c(0,0,2))
fitMA2 
Box.test(fitMA2$residuals, lag = 5, type = "Ljung", fitdf = 2)
Box.test(fitMA2$residuals, lag = 10, type = "Ljung", fitdf = 2)
Box.test(fitMA2$residuals, lag = 15, type = "Ljung", fitdf = 2)
Box.test(fitMA2$residuals, lag = 20, type = "Ljung", fitdf = 2)
```

```{r}
# ARMA(1,1) fit to the first CAPM series (Ecdat) with residual
# diagnostics: ACF, normal QQ-plot, and the residual trace.
data(Capm, package = "Ecdat")
# NOTE(review): two problems on the next line — start=1960-01 is the
# arithmetic 1960 minus 1 (i.e. 1959), not "January 1960", and as.ts()
# ignores start/frequency anyway.  The intended call is probably
# ts(Capm[,1], start = c(1960, 1), frequency = 12); confirm before fixing.
y = as.ts(Capm[,1], start=1960-01, frequency=12)

fitARMA1 = arima(y, order = c(1,0,1))
fitARMA1


acf(residuals(fitARMA1),lag.max=20,main="ARMA1")
qqnorm(residuals(fitARMA1),main="ARMA1") ; qqline(residuals(fitARMA1))
plot(residuals(fitARMA1))

```

```{r}
# Roots of the quadratic 1 - 1.229*z + 0.233*z^2; polyroot() takes the
# coefficients in ascending order of powers.
quad_coefs <- c(1, -1.229, 0.233)
print(polyroot(quad_coefs))
```

```{r}
# Unit-root and stationarity tests for the inflation series: ADF and
# Phillips-Perron test H0 = unit root; KPSS tests H0 = stationarity.
# NOTE(review): adf.test/pp.test/kpss.test live in the tseries package,
# which no earlier chunk attaches — ensure library(tseries) has been run.
data(Mishkin, package = 'Ecdat')
y = as.vector(Mishkin[,1])

adf.test(y)

pp.test(y)

kpss.test(y)
```


```{r}

# Automatic ARIMA order selection (AIC vs BIC), then an explicit
# ARIMA(1,1,1) fit with a residual ACF and a Ljung-Box check
# (fitdf = 2 for the two estimated ARMA coefficients).
auto.arima(y, max.p=5, max.q = 5, ic='aic', trace = FALSE)
auto.arima(y, max.p=5, max.q = 5, ic='bic', trace = FALSE)

fitARIMA111= arima(y, c(1,1,1))
par(mfrow=c(1,1))
acf(fitARIMA111$resid)
Box.test(fitARIMA111$resid, lag = 15, fitdf =2)
```
```{r}
# Seasonal ARIMA for quarterly housing starts: compare
# (1,1,1)x(1,1,1)_4 against (1,1,1)x(0,1,1)_4, with Ljung-Box residual
# checks (fitdf = number of estimated coefficients in each model).
# Note: this reassigns the global x used by earlier chunks.

data(Hstarts, package="Ecdat") 
x = ts(Hstarts[,1], start=1960, frequency=4)

fit1 = arima(x, c(1,1,1), seasonal = list(order = c(1,1,1), period = 4))
fit1
fit2 = arima(x, c(1,1,1), seasonal = list(order = c(0,1,1), period = 4))
fit2

Box.test(residuals(fit2), type="Ljung-Box", lag = 10, fitdf=3)
Box.test(residuals(fit1), type="Ljung-Box", lag = 10, fitdf=4)
```


```{r}
# Long-memory (fractionally integrated) noise: simulate FARIMA(0,d,0)
# paths for d = -0.35 and d = 0.35 via simARMA0 (longmemo), which takes
# the Hurst parameter H = d + 1/2, and show each path with its ACF.

library("fracdiff")
library("longmemo")

D = c(-.35, .35)

set.seed("09201948")

par(mfrow=c(length(D)+1,2))

for (i in 1:length(D))
{
H = D[i] + 1/2
x = simARMA0(2500,H)
plot(x,main=toString(paste("d =", D[i])) )
acf(x,main=toString(paste("d =", D[i])) )
}

# d = 0.7 lies outside the stationary range |d| < 1/2, so simulate
# d - 1 = -0.3 and integrate once with cumsum, giving an effective
# d = 0.7 path (this also overwrites the globals x and y).
d= .7-1
H = d + 1/2
y = simARMA0(2500,H)
x = cumsum(y)
plot(x,main="d = 0.7",type="l")
acf(x,main="d = 0.7")
```
```{r}

# Fractional differencing of the d = 0.7 path: diffseries(x, 0.7) should
# leave roughly white noise, while an ordinary first difference (d = 1)
# over-differences.
par(mfrow=c(1,2))
acf(diffseries(x,.7),main =expression(paste(Delta^0.7, Y)))
acf(diff(x),main = expression(paste(Delta, Y)))
```

```{r}
# Simulate an AR(1) process with GARCH(1,1) innovations:
#   a[t]      = sqrt(sig2[t]) * e[t]                  (innovations)
#   y[t]      = mu + phi*(y[t-1] - mu) + a[t]         (AR(1) mean)
#   sig2[t+1] = omega + alpha*a[t]^2 + beta*sig2[t]   (GARCH(1,1) variance)
n = 10500
set.seed("2340")
e = rnorm(n)   # i.i.d. N(0,1) driving noise
a = e          # GARCH innovations (overwritten in the loop)
y = e          # AR+GARCH series (overwritten in the loop)
# BUG FIX: sig2 was initialised as the scalar 2^2, so sig2[t] was NA for
# every t >= 2 and the whole simulation propagated NA.  Initialise the
# full conditional-variance vector instead (the loop writes indices
# 3..n+1, so sig2[2] keeps its starting value of 4).
sig2 = rep(2^2, n + 1)
omega = 1
alpha = 0.08
beta = 0.90
phi = 0.8
mu = 0.1

for (t in 2:n)
{
a[t] = sqrt(sig2[t])*e[t]
y[t] = mu + phi*(y[t-1]-mu) + a[t]
sig2[t+1] = omega + alpha*a[t]^2 + beta*sig2[t]
}

# Open a PDF device; the figure is drawn (and the device closed via
# graphics.off()) in the next chunk.
pdf ("garcho3.pdf", width = 9, height = 6)
#
```


```{r}
# Diagnostics for the simulated AR(1)+GARCH(1,1) from the previous
# chunk, discarding the first 10,000 observations as burn-in.  Drawn on
# the garcho3.pdf device opened there; graphics.off() closes it.
par(mfrow=c(2,4))

plot(e[10001:n], type="l",xlab="t", ylab = expression(epsilon), main="(a) white noise")

plot(sqrt(sig2[10001:n]), type="l",xlab="t",ylab=expression(sigma[t]), main="(b) conditional std dev")
plot(a[10001:n], type="l",xlab="t",ylab="a", main="(C) GARCH")
plot(y[10001:n], type="l",xlab="t",ylab="y",main="(d) AR+GARCH")
acf(a[10001:n], main="(e) GARCH")
acf(a[10001:n]^2, main="(f) GARCH squared")
acf(y[10001:n], main="(g) AR+GARCH")
acf(y[10001:n]^2, main="(h) AR+GARCH squared")
#
graphics.off()
```
```{r}
library(xts)
library(fGarch)
# Fit an AR(1) + GARCH(1,1) model with Gaussian innovations to the BMW
# returns.
# FIX: the original called garchFit() twice — once bare and once inside
# summary() — running the expensive ML optimisation twice for the same
# model.  Fit once and reuse the object; printed results are unchanged.
bmw.arma.garch <- garchFit(formula = ~arma(1,0) + garch(1, 1), data = bmw, cond.dist = "norm")
bmw.arma.garch
summary(bmw.arma.garch)
```
```{r}
# Same AR(1)+GARCH(1,1) model via rugarch: specify the mean and variance
# equations, fit to the BMW returns, print the full report, and draw the
# built-in diagnostic panels.
library(xts)
library(rugarch)
data(bmw, package="evir")
arma.garch.norm = ugarchspec(mean.model=list(armaOrder=c(1,0)), variance.model=list(garchOrder=c(1,1)))
bmw.garch.norm = ugarchfit(data=bmw, spec=arma.garch.norm)
show(bmw.garch.norm)
plot(bmw.garch.norm, which="all")
# Selected diagnostic plots in a 3x2 grid.
par(mfrow = c(3,2))
for(i in c(1,3,10,11,8,9)) 
  plot(bmw.garch.norm, which=i)
```

```{r}
library(MASS)
# Refit the AR(1)+GARCH(1,1) Gaussian model and check whether the
# standardized residuals are better described by a t distribution.
# Note: this reassigns the globals e and n defined in earlier chunks.
bmw.garch.norm <- garchFit(formula = ~arma(1, 0) + garch(1, 1), data = bmw, cond.dist = "norm")
e = residuals(bmw.garch.norm, standardize=TRUE)
# ML fit of a t distribution (location, scale, df) to the residuals.
fitdistr(e,"t")
n = length(e)
grid = (1:n)/(n+1)
# QQ-plot of the residuals against t(4) quantiles.
qqplot(sort(as.numeric(e)), qt(grid,df=4), main="t-plot, df=4",xlab= "Standardized residual quantiles",ylab="t-quantiles")
qqnorm(sort(as.numeric(e)));
# BUG FIX: the reference line must be drawn for the data just plotted by
# qqnorm() — the original called qqline(y), referencing a stale global
# from an earlier chunk instead of the residuals e.
qqline(as.numeric(e), col = 2)
```

```{r}
# Refit with Student-t innovations (cond.dist = "std"); the extra
# "shape" coefficient is the estimated degrees of freedom.
garchFit(formula = ~arma(1, 1) + garch(1, 1), data = bmw,
cond.dist = "std")
```
```{r}
# Simulate 1000 observations from an MA(2) model and inspect the sample
# path together with its autocorrelation function (two stacked panels).
ma_coefs <- c(-0.2279, 0.2488)
ma2 <- arima.sim(model = list(ma = ma_coefs), n = 1000)
par(mfrow = c(2, 1))
plot(ma2)
acf(ma2)
```
```{r}
# Simulate an AR(1) path with coefficient 0.9 and display the series
# above its sample ACF.
ar_coef <- 0.9
ar1 <- arima.sim(model = list(ar = ar_coef), n = 1000)
par(mfrow = c(2, 1))
plot(ar1)
acf(ar1)
```
```{r}
# Simulated AR(5) path and its ACF.
# NOTE(review): arima.sim() rejects a non-stationary 'ar' specification
# with an error — confirm this coefficient vector satisfies the
# stationarity condition before relying on this chunk.
ar5 <- arima.sim(n = 1000, list(ar = c(-0.2279, 0.2488, -0.5, 0.2, 0.6)))
par(mfrow=c(2,1))
plot(ar5)
acf(ar5)
```
```{r}
library(prophet) 
library(TSstudio)

# NOTE(review): tbats(), forecast() and elecequip come from the
# forecast/fpp2 packages, which are attached only in the NEXT chunk —
# running the notebook top to bottom in a fresh session fails here.
model <- tbats(elecequip, use.box.cox = TRUE, use.damped.trend = F,start.p=2,max.p=2,start.q=1,max.q=1)
plot(forecast(model, h=12))
```
```{r}
# Monthly electrical-equipment orders index (fpp2 package).
library(fpp2)
plot(elecequip)
```
```{r}

# Random-walk pair for the unit-root / regression demo that follows:
# X is a pure random walk, and Y is a noisy linear function of X, so
# both levels are non-stationary while their first differences are not.
data_points <- 1000
X <- cumsum(rnorm(data_points, 0, 1))
Y <- 2 * X + 10 + rnorm(data_points, 0, 1)

# 2x2 panel: each level series next to its first difference.
par(mfrow = c(2, 2))
plot(X, main = "Time Series X")
plot(diff(X), main = "Time Series of First Differences of X")
plot(Y, main = "Time Series Y")
plot(diff(Y), main = "Time Series of First Differences of Y")

```

```{r}
# ADF tests: the levels X and Y should fail to reject the unit root,
# while their first differences reject it — both are random walks by
# construction.
# NOTE(review): adf.test() is in the tseries package; make sure it is
# attached before this chunk runs.
adf.test(X, k=1)
adf.test(diff(X), k=0)

adf.test(Y, k=1)
adf.test(diff(Y), k=0)

```

```{r}
# Levels regression of one integrated series on the other; since Y was
# built as 2*X + 10 + noise, the coefficients land near 10 and 2.
lm(Y~X)

```

```{r}

# Personal consumption vs population (ggplot2's economics data): a
# levels regression of two trending series; the residual ACF shows the
# strong autocorrelation left behind.
library(ggplot2) 
data(economics) 
lmMod <- lm(pce ~ pop, data=economics) 
lmMod
acf(lmMod$residuals) # highly autocorrelated from the picture.

```
```{r}

# Durbin-Watson test for first-order residual autocorrelation (a DW
# statistic near 0 indicates strong positive autocorrelation).
lmtest::dwtest(lmMod)
```

```{r}
# Stopping distance versus speed for the built-in `cars` data: scatter
# plot with a loess smooth, then a simple linear fit printed for
# reference.
scatter.smooth(x = cars$speed, y = cars$dist, main = "Dist ~ Speed")

linearMod <- lm(dist ~ speed, data = cars)
print(linearMod)
```
```{r}
# Split `cars` into an 80/20 train/test partition.
set.seed(100)  # fixed seed so the random split is reproducible
n_rows <- nrow(cars)
trainingRowIndex <- sample(1:n_rows, 0.8 * n_rows)  # rows kept for training
trainingData <- cars[trainingRowIndex, ]            # 80% training set
testData <- cars[-trainingRowIndex, ]               # remaining 20% held out

```

```{r}
# Fit on the training partition, predict the held-out rows, and report
# the test-set mean squared error.
lmMod <- lm(dist ~ speed, data = trainingData)
distPred <- predict(lmMod, testData)  # predicted stopping distances
# Pair observed and predicted values for inspection.
actuals_preds <- data.frame(cbind(actuals = testData$dist, predicteds = distPred))

# Test-set MSE.
squared_errors <- (actuals_preds$predicteds - actuals_preds$actuals)^2
mean(squared_errors)
```

```{r}
# Quick look at the built-in cars data (speed vs stopping distance):
# first rows, scatter plot, numeric summary, and boxplots.
library("ggplot2") 
library("tidyverse")

data(cars)
head(cars)
plot(cars)
summary(cars)
boxplot(cars)

```

```{r}

# Loess extrapolation demo: train on the first 35 rows of cars and
# predict the remaining 15 (the highest-speed rows, per the data's
# ordering).  surface = "direct" lets predict() extrapolate beyond the
# training range.
cars.loess <- select(cars, speed, dist)
cars.Train <- cars.loess[1:35, ]
cars.Test <- cars.loess[36:50, ]

cars.LoessModel <- loess(dist ~ speed, cars.Train,
                         control = loess.control(surface = "direct"))
cars.LoessModel

# Speeds of the held-out rows, used as the prediction grid.
forwardSeq <- as.vector(cars.Test$speed)
forwardSeq
cars.Predicted <- predict(cars.LoessModel, data.frame(speed = forwardSeq),
                          se = TRUE)
cars.Predicted
# Attach the point forecasts to the test rows for inspection.
cars.Test$predicted_dist <- cars.Predicted$fit
cars.Test
```
```{r}
# Reshape the predictions so they can be overlaid as a line named dist.
cars.TestPlot <- select(cars.Test, speed, predicted_dist) 
cars.TestPlot <- rename(cars.TestPlot, dist = predicted_dist)

# Training points + loess ribbon, with the extrapolated predictions as a
# thick line.
# NOTE(review): colour = "#000099" is INSIDE aes(), so ggplot2 maps the
# string as a data value and picks a palette colour rather than using
# that hex value literally; move colour outside aes() for dark blue.
ggplot(cars.Train, aes(x = speed, y = dist)) + geom_point() + 
  geom_smooth(stat="smooth", position = "identity", method = "loess", formula = y ~ x, se = TRUE, na.rm = FALSE, inherit.aes = TRUE)+ geom_line(data = cars.TestPlot, aes(y=dist, x=speed, colour="#000099"), show.legend = FALSE, size = 2)
```
```{r}
# Same overlay, but the smooth is now fitted on all 50 observations.
ggplot(cars.loess, aes(x = speed, y = dist)) + geom_point() + geom_smooth( stat="smooth", position = "identity", method = "loess", formula = y ~ x, se = TRUE, na.rm = FALSE, inherit.aes = TRUE ) + geom_line(data = cars.TestPlot, aes(y=dist, x=speed, colour="#000099"), show.legend = FALSE, size = 2)
```
```{r}

# Repeat the loess train/predict exercise with a random (rather than
# ordered) 70/30 split, so the test speeds fall inside the training
# range instead of requiring extrapolation.
## 70% of the sample size
smp_size <- floor(0.70 * nrow(cars.loess))
## set the seed to make your partition reproducible 
set.seed(123) 
train_ind <- sample(seq_len(nrow(cars.loess)), size = smp_size) 
cars.Train <- cars.loess[train_ind, ] 
cars.Test <- cars.loess[-train_ind, ]

cars.LoessModel <- loess(dist ~ speed, cars.Train, control = loess.control(surface = "direct"))
forwardSeq <- as.vector(cars.Test$speed)
cars.Predicted <- predict(cars.LoessModel, data.frame(speed=forwardSeq), se=TRUE) 
cars.Test$predicted_dist <- cars.Predicted$fit 
cars.TestPlot <- select(cars.Test, speed, predicted_dist) 
cars.TestPlot <- rename(cars.TestPlot, dist = predicted_dist)

ggplot(cars.loess, aes(x = speed, y = dist)) + geom_point() + geom_smooth( stat="smooth", position = "identity", method = "loess", formula = y ~ x, se = TRUE, na.rm = FALSE, inherit.aes = TRUE ) + geom_line(data = cars.TestPlot, aes(y=dist, x=speed, colour="#000099"), show.legend = FALSE, size = 2)

```
```{r}
# Electrical-equipment orders: naive (last value) and seasonal-naive
# (same month last year) benchmark forecasts.
library(fpp2)
plot(elecequip)

library(forecast)
par(mfrow=c(2,1))
model <- naive(elecequip)
plot(predict(model, n.ahead=24))
model <- snaive(elecequip)
plot(predict(model, n.ahead=24))

```
```{r}
# Robust STL decomposition (13-term trend window, periodic seasonal),
# then forecast(): seasonally adjust, extrapolate with the naive method,
# and re-seasonalize.
plot(stl(elecequip, t.window=13, s.window="periodic", robust=TRUE))
par(mfrow=c(1,1))
model <- stl(elecequip, t.window=13, s.window="periodic", robust=TRUE)
plot(forecast(model, method = "naive", h=12))

```
```{r}
# Classical moving-average decomposition for comparison with STL.
plot(decompose(elecequip))
```
```{r}
# Automatically selected exponential-smoothing (ETS) state-space model.
model <- ets(elecequip) 
plot(forecast(model,h=12))
```
```{r}
# Automatic seasonal ARIMA, then the same ARIMA(4,0,1)(0,1,1)[12]
# specification fitted manually with stats::arima and forecast 12 steps.
model <- auto.arima(elecequip) 
model
plot(forecast(model,h=12)) 
forecast(arima(elecequip, order=c(4,0,1), seasonal = c(0,1,1)), h=12)

```
```{r}
# TBATS model (Box-Cox transform on, damped trend off, ARMA error
# orders pinned to p = 2, q = 1).
model <- tbats(elecequip, use.box.cox = TRUE, use.damped.trend = F,start.p=2,max.p=2,start.q=1,max.q=1)
plot(forecast(model, h=12))
```
```{r}
# Convert the ts to the two-column (ds, y) data frame prophet expects.
library(prophet) 
library(TSstudio)
df = ts_to_prophet(elecequip) 
# Advance a c(year, period) time stamp by one period.
#
# y      : either a time series (its end() stamp is advanced) or, when
#          is_end = TRUE, a length-2 c(year, period) vector to advance
#          directly.
# is_end : TRUE when y is already a c(year, period) vector.
# period : periods per year (12 for monthly data; kept as a default so
#          existing call sites are unchanged).
# Returns c(year, period) of the next time step.
#
# BUG FIX: the original packed several statements onto one line with no
# separators ("new_start[1]=...+1 new_start[2]=1 ... return(...)"),
# which does not parse in R.
next_time_step = function(y, is_end = FALSE, period = 12) {
  if (is_end) {
    new_start = y
  } else {
    new_start = end(y)
  }
  if (new_start[2] == period) {
    # Roll over into the first period of the next year.
    new_start[1] = new_start[1] + 1
    new_start[2] = 1
  } else {
    new_start[2] = new_start[2] + 1
  }
  return(new_start)
}
```

